#include <xen/config.h>
-/*
- * These have to be done with inline assembly: that way the bit-setting
- * is guaranteed to be atomic. All bit operations return 0 if the bit
- * was cleared before the operation and != 0 if it was not.
- *
- * bit 0 is the LSB of addr; bit 32 is the LSB of (addr+1).
- */
-
/*
 * LOCK_PREFIX prepends the x86 "lock" prefix to the bit-modifying
 * instructions below (btsl/btrl/btcl) on SMP builds, making each
 * read-modify-write atomic with respect to other CPUs. Uniprocessor
 * builds expand it to nothing, since no other CPU can interleave.
 */
#ifdef CONFIG_SMP
#define LOCK_PREFIX "lock ; "
#else
#define LOCK_PREFIX ""
#endif
+/*
+ * We use the "+m" constraint because the memory operand is both read from
+ * and written to. Since the operand is in fact a word array, we also
+ * specify "memory" in the clobbers list to indicate that words other than
+ * the one directly addressed by the memory operand may be modified.
+ */
+
/*
 * ADDR forms the memory operand used by the asm statements below from the
 * enclosing function's 'addr' parameter. Cast through volatile long so the
 * compiler treats it as a real memory access rather than caching it.
 */
#define ADDR (*(volatile long *) addr)
/**
{
__asm__ __volatile__( LOCK_PREFIX
"btsl %1,%0"
- :"=m" (ADDR)
- :"dIr" (nr));
+ :"+m" (ADDR)
+ :"dIr" (nr) : "memory");
}
/**
{
__asm__(
"btsl %1,%0"
- :"=m" (ADDR)
- :"dIr" (nr));
+ :"+m" (ADDR)
+ :"dIr" (nr) : "memory");
}
/**
{
__asm__ __volatile__( LOCK_PREFIX
"btrl %1,%0"
- :"=m" (ADDR)
- :"dIr" (nr));
+ :"+m" (ADDR)
+ :"dIr" (nr) : "memory");
}
/**
{
__asm__(
"btrl %1,%0"
- :"=m" (ADDR)
- :"dIr" (nr));
+ :"+m" (ADDR)
+ :"dIr" (nr) : "memory");
}
/*
 * Ordering hook invoked before clear_bit(). Expands to barrier(), which
 * is presumably a compiler-only barrier (no fence instruction emitted) —
 * confirm against barrier()'s definition, which is not visible here.
 */
#define smp_mb__before_clear_bit() barrier()
{
__asm__ __volatile__(
"btcl %1,%0"
- :"=m" (ADDR)
- :"dIr" (nr));
+ :"+m" (ADDR)
+ :"dIr" (nr) : "memory");
}
/**
{
__asm__ __volatile__( LOCK_PREFIX
"btcl %1,%0"
- :"=m" (ADDR)
- :"dIr" (nr));
+ :"+m" (ADDR)
+ :"dIr" (nr) : "memory");
}
/**
__asm__ __volatile__( LOCK_PREFIX
"btsl %2,%1\n\tsbbl %0,%0"
- :"=r" (oldbit),"=m" (ADDR)
+ :"=r" (oldbit),"+m" (ADDR)
:"dIr" (nr) : "memory");
return oldbit;
}
__asm__(
"btsl %2,%1\n\tsbbl %0,%0"
- :"=r" (oldbit),"=m" (ADDR)
- :"dIr" (nr));
+ :"=r" (oldbit),"+m" (ADDR)
+ :"dIr" (nr) : "memory");
return oldbit;
}
__asm__ __volatile__( LOCK_PREFIX
"btrl %2,%1\n\tsbbl %0,%0"
- :"=r" (oldbit),"=m" (ADDR)
+ :"=r" (oldbit),"+m" (ADDR)
:"dIr" (nr) : "memory");
return oldbit;
}
__asm__(
"btrl %2,%1\n\tsbbl %0,%0"
- :"=r" (oldbit),"=m" (ADDR)
- :"dIr" (nr));
+ :"=r" (oldbit),"+m" (ADDR)
+ :"dIr" (nr) : "memory");
return oldbit;
}
__asm__ __volatile__(
"btcl %2,%1\n\tsbbl %0,%0"
- :"=r" (oldbit),"=m" (ADDR)
+ :"=r" (oldbit),"+m" (ADDR)
:"dIr" (nr) : "memory");
return oldbit;
}
__asm__ __volatile__( LOCK_PREFIX
"btcl %2,%1\n\tsbbl %0,%0"
- :"=r" (oldbit),"=m" (ADDR)
+ :"=r" (oldbit),"+m" (ADDR)
:"dIr" (nr) : "memory");
return oldbit;
}
/*
 * constant_test_bit - test bit 'nr' of the word array at 'addr'.
 * Returns nonzero if the bit is set, 0 otherwise. Used for the
 * compile-time-constant 'nr' case (variable_test_bit handles the rest).
 * The hunk below narrows 1UL to 1U so the mask's type matches the
 * unsigned int array element it is ANDed with; on 32-bit x86 the
 * produced value is identical.
 */
static __inline__ int constant_test_bit(int nr, const volatile void * addr)
{
-	return ((1UL << (nr & 31)) & (((const volatile unsigned int *) addr)[nr >> 5])) != 0;
+	return ((1U << (nr & 31)) & (((const volatile unsigned int *) addr)[nr >> 5])) != 0;
}
static __inline__ int variable_test_bit(int nr, volatile void * addr)